runtime.mspan.base (method)
91 uses
runtime (current package)
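
The call sites below mostly follow two patterns: computing an object address as base plus index times element size, or recovering an offset/index by subtracting base from a pointer. As a rough orientation only, here is a hypothetical, heavily simplified mirror of the method and those two patterns; the real mspan lives in runtime/mheap.go (definition listed at mheap.go#L522 below), is unexported, and has many more fields than shown.

	package main

	import "fmt"

	// span is an illustrative stand-in for runtime.mspan, not the real type.
	type span struct {
		startAddr uintptr // address of the first byte of the span
		elemsize  uintptr // size of one object slot in the span
		nelems    uintptr // number of object slots
	}

	// base mirrors runtime.(*mspan).base: the span's start address, from
	// which object addresses and offsets are derived at the call sites below.
	func (s *span) base() uintptr { return s.startAddr }

	func main() {
		// Example values chosen for illustration only.
		s := &span{startAddr: 0xc000100000, elemsize: 48, nelems: 170}

		// Pattern 1: object address from an index (e.g. mgcsweep.go, traceallocfree.go).
		idx := uintptr(3)
		obj := s.base() + idx*s.elemsize

		// Pattern 2: offset and index of a pointer within the span (e.g. mheap.go, mbitmap.go).
		offset := obj - s.base()
		fmt.Printf("obj=%#x offset=%d index=%d\n", obj, offset, offset/s.elemsize)
	}
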
arena.go#L554: base := s.base()
arena.go#L601: offset := addr - s.base()
arena.go#L666: offset := addr - s.base()
arena.go#L768: x := unsafe.Pointer(span.base())
arena.go#L775: gcmarknewobject(span, span.base())
arena.go#L780: racemalloc(unsafe.Pointer(span.base()), span.elemsize)
arena.go#L785: msanmalloc(unsafe.Pointer(span.base()), span.elemsize)
arena.go#L791: rzStart := span.base() + span.elemsize
arena.go#L793: asanunpoison(unsafe.Pointer(span.base()), span.elemsize)
arena.go#L805: profilealloc(mp, unsafe.Pointer(span.base()), userArenaChunkBytes)
arena.go#L878: sysFault(unsafe.Pointer(s.base()), s.npages*pageSize)
arena.go#L907: racefree(unsafe.Pointer(s.base()), s.elemsize)
arena.go#L945: racefree(unsafe.Pointer(s.base()), s.elemsize)
arena.go#L948: msanfree(unsafe.Pointer(s.base()), s.elemsize)
arena.go#L951: asanpoison(unsafe.Pointer(s.base()), s.elemsize)
arena.go#L954: valgrindFree(unsafe.Pointer(s.base()))
arena.go#L1004: base = s.base()
arena.go#L1065: s.limit = s.base() + s.elemsize
arena.go#L1104: memclrNoHeapPointers(unsafe.Pointer(s.base()), s.elemsize)
heapdump.go#L463: p := unsafe.Pointer(s.base() + uintptr(spf.special.offset))
heapdump.go#L485: p := s.base()
heapdump.go#L662: p := s.base() + uintptr(spp.special.offset)
malloc.go#L943: return gclinkptr(uintptr(result)*s.elemsize + s.base())
malloc.go#L979: v = gclinkptr(uintptr(freeIndex)*s.elemsize + s.base())
malloc.go#L1557: x := unsafe.Pointer(span.base())
mbitmap.go#L343: return span.base() + span.objIndex(addr)*span.elemsize
mbitmap.go#L412: } else if s.state.get() != mSpanInUse || dst < s.base() || s.limit <= dst {
mbitmap.go#L553: return heapBitsSlice(span.base(), pageSize, span.elemsize)
mbitmap.go#L555: return heapBitsSlice(span.base(), span.npages*pageSize, span.elemsize)
mbitmap.go#L586: hbitsBase, _ := spanHeapBitsRange(span.base(), span.npages*pageSize, span.elemsize)
mbitmap.go#L597: i := (addr - span.base()) / goarch.PtrSize / ptrBits
mbitmap.go#L598: j := (addr - span.base()) / goarch.PtrSize % ptrBits
mbitmap.go#L653: dstBase, _ := spanHeapBitsRange(span.base(), pageSize, span.elemsize)
mbitmap.go#L655: o := (x - span.base()) / goarch.PtrSize
mbitmap.go#L1206: return s.divideByElemSize(p - s.base())
mbitmap.go#L1284: print(" span.base()=", hex(s.base()), " span.limit=", hex(s.limit), " span.state=", state)
mbitmap.go#L1336: if state := s.state.get(); state != mSpanInUse || p < s.base() || p >= s.limit {
mbitmap.go#L1350: base = s.base() + objIndex*s.elemsize
mcache.go#L264: s.limit = s.base() + size
mcheckmark.go#L170: lastTinyBlock = s.base() + sp.offset
mcheckmark.go#L327: if !yield(s.base()+sp.offset, s, sp) {
mcleanup.go#L163: offset := c.ptr - span.base()
mgcmark.go#L418: p := s.base() + uintptr(spf.special.offset)/s.elemsize*s.elemsize
mgcmark.go#L1471: if b == s.base() {
mgcmark.go#L1477: for oblet := b + maxObletBytes; oblet < s.base()+s.elemsize; oblet += maxObletBytes {
mgcmark.go#L1487: n = s.base() + s.elemsize - b
mgcmark.go#L1489: tp = s.typePointersOfUnchecked(s.base())
mgcmark.go#L1632: obj := span.base() + idx*span.elemsize
mgcmark.go#L1691: arena, pageIdx, pageMask := pageIndexOf(span.base())
mgcmark.go#L1724: print(" s.base()=", hex(s.base()), " s.limit=", hex(s.limit), " s.spanclass=", s.spanclass, " s.elemsize=", s.elemsize, " s.state=")
mgcmark.go#L1787: arena, pageIdx, pageMask := pageIndexOf(span.base())
mgcsweep.go#L557: p := s.base() + objIndex*size
mgcsweep.go#L563: endOffset := p - s.base() + size
mgcsweep.go#L580: p := s.base() + uintptr(special.offset)
mgcsweep.go#L596: p := s.base() + uintptr(special.offset)
mgcsweep.go#L623: x := s.base() + i*s.elemsize
mgcsweep.go#L831: sysFault(unsafe.Pointer(s.base()), size)
mgcsweep.go#L864: addr := s.base() + i*s.elemsize
mgcwork.go#L449: newb := (*workbuf)(unsafe.Pointer(s.base() + i))
mheap.go#L522: func (s *mspan) base() uintptr {
mheap.go#L678: if s == nil || b < s.base() {
mheap.go#L753: if s == nil || s.state.get() != mSpanInUse || p < s.base() || p >= s.limit {
mheap.go#L1491: s.limit = s.base() + uintptr(s.elemsize)*uintptr(s.nelems)
mheap.go#L1518: h.setSpans(s.base(), npages, s)
mheap.go#L1526: arena, pageIdx, pageMask := pageIndexOf(s.base())
mheap.go#L1646: base := unsafe.Pointer(s.base())
mheap.go#L1652: base := unsafe.Pointer(s.base())
mheap.go#L1657: base := s.base()
mheap.go#L1689: base := s.base()
mheap.go#L1709: print("mheap.freeSpanLocked - span ", s, " ptr ", hex(s.base()), " allocCount ", s.allocCount, " sweepgen ", s.sweepgen, "/", h.sweepgen, "\n")
mheap.go#L1715: arena, pageIdx, pageMask := pageIndexOf(s.base())
mheap.go#L1747: h.pages.free(s.base(), s.npages)
mheap.go#L2026: arenaPage := (s.base() / pageSize) % pagesPerArena
mheap.go#L2027: ai := arenaIndex(s.base())
mheap.go#L2034: arenaPage := (s.base() / pageSize) % pagesPerArena
mheap.go#L2035: ai := arenaIndex(s.base())
mheap.go#L2058: offset := uintptr(p) - span.base()
mheap.go#L2096: offset := uintptr(p) - span.base()
mheap.go#L2289: offset := ptr - span.base()
mheap.go#L2335: offset := ptr - span.base()
mheap.go#L2412: offset := ptr - span.base()
mheap.go#L2648: offset := uintptr(p) - span.base()
mwbbuf.go#L256: arena, pageIdx, pageMask := pageIndexOf(span.base())
signal_unix.go#L410: if s != nil && s.state.get() == mSpanManual && s.base() < sp && sp < s.limit {
signal_unix.go#L411: gp := *(**g)(unsafe.Pointer(s.base()))
stack.go#L213: x := gclinkptr(s.base() + i)
stack.go#L429: v = unsafe.Pointer(s.base())
stack.go#L536: println(hex(s.base()), v)
synctest.go#L395: offset := uintptr(p) - span.base()
traceallocfree.go#L71: x := s.base() + i*s.elemsize
traceallocfree.go#L109: return traceArg(uint64(s.base())-trace.minPageHeapAddr) / pageSize